In [1]:
import json
import copy
import time
import random
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import matplotlib.pyplot as plt
from matplotlib import pyplot as plt  # NOTE(review): duplicate of the line above — both bind `plt`; one can be removed
from torchsummary import summary  # NOTE(review): `summary` is not used in the visible cells — confirm before removing
In [2]:
from nmfd_gnn import NMFD_GNN

1: set parameters¶

In [3]:
# Report GPU availability and pin all computation to the first CUDA device.
print (torch.cuda.is_available())
device = torch.device("cuda:0")
# Seed Python's `random` and torch (CPU + current CUDA device) for reproducibility.
# NOTE(review): numpy's RNG is not seeded here — confirm no numpy randomness is used downstream.
random_seed = 42
random.seed(random_seed)
torch.manual_seed(random_seed)
torch.cuda.manual_seed(random_seed)
# `r` is passed later as random.shuffle(..., random=r) to make shuffling reproducible.
r = random.random
True
In [4]:
#1.1: settings
M = 20                       #number of time interval in a window
missing_ratio = 0.50         #fraction of readings masked as missing — presumably matches the data-prep step; verify
file_name = "m_" + str(M) + "_missing_" + str(int(missing_ratio*100))
print (file_name)

#1.2: hyperparameters
num_epochs, batch_size, learning_rate = 200, 16, 0.001
# Loss weights: flow data term, occupancy data term, physics term.
beta_flow, beta_occ, beta_phy = 1.0, 1.0, 0.1
batch_size_vt = 16  #batch size for evaluation and test

hyper = {"n_e": num_epochs, "b_s": batch_size, "b_s_vt": batch_size_vt, "l_r": learning_rate,\
         "beta_f": beta_flow, "beta_o": beta_occ, "beta_p": beta_phy}

# Architecture settings consumed by NMFD_GNN.
gnn_dim_1, gnn_dim_2, gnn_dim_3, lstm_dim = 2, 128, 128, 128
p_dim = 10    #column dimension of L1, L2
c_k = 5.5     #meter, the sum of loop width and uniform vehicle length. based on Gero and Daganzo 2008.
theta_ini = [-2.757, 4.996, -2.409, 1.638, 3.569]  # initial physics (theta) parameters — provenance not shown in this file

hyper_model = {"g_dim_1": gnn_dim_1, "g_dim_2": gnn_dim_2, "g_dim_3": gnn_dim_3, "l_dim": lstm_dim,\
               "p_dim": p_dim, "c_k": c_k, "theta_ini": theta_ini}
max_no_decrease = 30  # early-stop patience: epochs without validation improvement

#1.3: set paths
# NOTE(review): absolute, machine-specific root path — consider making it configurable.
root_path = "/home/umni2/a/umnilab/users/xue120/umni4/2023_mfd_traffic/"
file_path = root_path + "2_prepare_data/" + file_name + "/"
train_path, vali_path, test_path =\
    file_path + "train.json", file_path + "vali.json", file_path + "test.json"
sensor_id_path = file_path + "sensor_id_order.json"
sensor_adj_path = file_path + "sensor_adj.json"
mean_std_path = file_path + "mean_std.json"
m_20_missing_50

2: visualization¶

In [5]:
def visualize_train_loss(total_phy_flow_occ_loss):
    """Plot per-epoch physics/flow/occupancy training losses and save the figure.

    total_phy_flow_occ_loss: list of [total, phy, flow, occ] average losses,
    one entry per epoch. The figure is written to <file_name>/train_loss.png
    (file_name is a module-level setting).
    """
    plt.figure(figsize=(4,3), dpi=75)
    loss_matrix = np.array(total_phy_flow_occ_loss)
    epochs = range(len(loss_matrix))
    # Column 0 (total loss) is intentionally not plotted; only the components are.
    for col, label in ((1, "phy loss"), (2, "flow loss"), (3, "occ loss")):
        plt.plot(epochs, loss_matrix[:, col], linewidth=1, label=label)
    plt.legend()
    plt.title('Loss decline on train')
    plt.xlabel('Epoch')
    plt.ylabel('Loss')
    plt.savefig(file_name + '/' + 'train_loss.png', bbox_inches = 'tight')
    plt.show()
    
def visualize_flow_loss(vali_f_mae, test_f_mae):
    """Plot validation/test flow MAE per epoch; saved to <file_name>/flow_mae.png."""
    plt.figure(figsize=(4,3), dpi=75)
    epochs = range(len(vali_f_mae))
    for series, label in ((vali_f_mae, "Validate"), (test_f_mae, "Test")):
        plt.plot(epochs, series, linewidth=1, label=label)
    plt.legend()
    plt.title('MAE of flow on validate/test')
    plt.xlabel('Epoch')
    plt.ylabel('MAE (veh/h)')
    plt.savefig(file_name + '/' + 'flow_mae.png', bbox_inches = 'tight')
    plt.show()
    
def visualize_occ_loss(vali_o_mae, test_o_mae):
    """Plot validation/test occupancy MAE per epoch; saved to <file_name>/occ_mae.png."""
    plt.figure(figsize=(4,3), dpi=75)
    epochs = range(len(vali_o_mae))
    for series, label in ((vali_o_mae, "Validate"), (test_o_mae, "Test")):
        plt.plot(epochs, series, linewidth=1, label=label)
    plt.legend()
    plt.title('MAE of occupancy on validate/test')
    plt.xlabel('Epoch')
    plt.ylabel('MAE')
    plt.savefig(file_name + '/' + 'occ_mae.png',bbox_inches = 'tight')
    plt.show()

3: compute the error¶

In [6]:
def MAELoss(yhat, y):
    """Mean absolute error between two tensors, returned as a Python float.

    Fix: the original divided the absolute error by 1 via torch.div(..., 1),
    a no-op that has been removed.
    """
    return float(torch.mean(torch.abs(yhat - y)))

def RMSELoss(yhat, y):
    """Root-mean-square error between two tensors, returned as a Python float."""
    squared_error = (yhat - y) ** 2
    return float(torch.sqrt(squared_error.mean()))

def vali_test(model, f, f_mask, o, o_mask, f_o_mean_std, b_s_vt):
    """Batched evaluation of flow/occupancy MAE and RMSE, de-normalized.

    f/o are normalized ground-truth tensors, f_mask/o_mask the masked model
    inputs (already on the model's device); f_o_mean_std is
    [f_mean, f_std, o_mean, o_std]. Per-batch MAEs are combined as a
    batch-size-weighted mean; per-batch RMSEs via the weighted mean of their
    squares. Returns (f_mae, f_rmse, o_mae, o_rmse) on the original scale.
    """
    flow_std, occ_std = f_o_mean_std[1], f_o_mean_std[3]
    n = len(f)
    f_mae_list, f_rmse_list = [], []
    o_mae_list, o_rmse_list = [], []
    num_list = []
    for start in range(0, n, b_s_vt):
        end = np.min([start + b_s_vt, n])
        num_list.append(end - start)
        bf, bo = f[start: end], o[start: end]
        bf_hat, bo_hat, _, _ = model.run(f_mask[start: end], o_mask[start: end])
        bf_hat, bo_hat = bf_hat.cpu(), bo_hat.cpu()
        f_mae_list.append(MAELoss(bf_hat, bf) * flow_std)
        f_rmse_list.append(RMSELoss(bf_hat, bf) * flow_std)
        o_mae_list.append(MAELoss(bo_hat, bo) * occ_std)
        o_rmse_list.append(RMSELoss(bo_hat, bo) * occ_std)
    f_mae = np.dot(f_mae_list, num_list) / n
    o_mae = np.dot(o_mae_list, num_list) / n
    f_rmse = np.sqrt(np.dot(np.multiply(f_rmse_list, f_rmse_list), num_list) / n)
    o_rmse = np.sqrt(np.dot(np.multiply(o_rmse_list, o_rmse_list), num_list) / n)
    return f_mae, f_rmse, o_mae, o_rmse

def evaluate(model, vt_f, vt_o, vt_f_m, vt_o_m, f_o_mean_std, b_s_vt):  # vt: vali_test
    """Thin wrapper around vali_test.

    Note it reorders its (flow, occ, flow_mask, occ_mask) arguments into
    vali_test's (f, f_mask, o, o_mask) order. Returns the same 4-tuple
    (f_mae, f_rmse, o_mae, o_rmse).
    """
    return vali_test(model, vt_f, vt_f_m, vt_o, vt_o_m, f_o_mean_std, b_s_vt)

4: train¶

In [7]:
#4.1: one training epoch
def train_epoch(model, opt, criterion, train_f_x, train_f_y, train_o_x, train_o_y, hyper, flow_std_squ):
    """Run one training epoch over mini-batches and return average losses.

    f: flow; o: occupancy. *_x are the masked (input) tensors, *_y the targets
    (targets stay on CPU; losses are computed on CPU). Returns
    (aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss).

    Fixes vs. the original:
    - the running averages are computed once after the loop instead of being
      recomputed on every iteration inside the loop body;
    - if n <= b_s (no full batch formed) the function returns zero losses
      instead of raising NameError on the undefined averages, which also
      removes the `+0.000001` division-guard hack;
    - losses are stored via .item() (plain Python floats) rather than the
      legacy .data.numpy(), which keeps the returned values JSON-serializable.
    """
    model.train()
    losses, p_losses, f_losses, o_losses = [], [], [], []

    beta_f, beta_o, beta_p, b_s = hyper["beta_f"], hyper["beta_o"], hyper["beta_p"], hyper["b_s"]
    n = len(train_f_x)
    print ("# batch: ", int(n/b_s))

    for i in range(0, n-b_s, b_s):
        time1 = time.time()
        x_f_batch, y_f_batch = train_f_x[i: i+b_s], train_f_y[i: i+b_s]
        x_o_batch, y_o_batch = train_o_x[i: i+b_s], train_o_y[i: i+b_s]

        opt.zero_grad()
        y_f_hat, y_o_hat, q_hat, q_theta = model.run(x_f_batch, x_o_batch)

        # Physical loss, normalized by the squared flow std so its scale is
        # comparable to the (z-score-normalized) data losses.
        p_loss = criterion(q_hat, q_theta).cpu() / flow_std_squ

        f_loss = criterion(y_f_hat.cpu(), y_f_batch)              #data loss of flow
        o_loss = criterion(y_o_hat.cpu(), y_o_batch)              #data loss of occupancy

        loss = beta_f*f_loss + beta_o*o_loss + beta_p*p_loss

        loss.backward()
        opt.step()
        losses.append(loss.item())
        p_losses.append(p_loss.item())
        f_losses.append(f_loss.item())
        o_losses.append(o_loss.item())

        if i % (64*b_s) == 0:
            print ("i_batch: ", i/b_s)
            print ("the loss for this batch: ", loss.item())
            print ("flow loss", f_loss.item())
            print ("occ loss", o_loss.item())
            time2 = time.time()
            print ("time for this batch", time2-time1)
            print ("----------------------------------")

    n_batches = len(losses)
    if n_batches == 0:  # n <= b_s: the loop never ran
        return 0.0, model, 0.0, 0.0, 0.0
    aver_loss = sum(losses)/n_batches
    aver_p_loss = sum(p_losses)/n_batches
    aver_f_loss = sum(f_losses)/n_batches
    aver_o_loss = sum(o_losses)/n_batches
    return aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss

#4.2: all train epochs
def train_process(model, criterion, train, vali, test, hyper, f_o_mean_std):
    """Full training loop: per-epoch shuffling, training, evaluation, plotting,
    metric checkpointing to JSON, LR scheduling, and early stopping.

    model: NMFD_GNN instance (already on `device`).
    criterion: loss module (MSE here) shared by the physics/flow/occ terms.
    train/vali/test: dicts with "flow", "occupancy" and their "_mask" tensors.
    hyper: dict with "l_r", "n_e", "b_s_vt" and the beta_* weights.
    f_o_mean_std: [f_mean, f_std, o_mean, o_std] used to (de)normalize metrics.

    Relies on module-level globals: device, r, max_no_decrease, file_name,
    and the sibling functions train_epoch / evaluate / visualize_*.
    Returns (total_phy_flow_occ_loss, model).
    """
    total_phy_flow_occ_loss = list()
    
    n_mse_flow_occ = 0 #mse(flow) + mse(occ) for validation sets.
    
    # Only the masked (input) tensors move to the GPU; targets stay on CPU
    # because losses/metrics are computed on CPU (see train_epoch/vali_test).
    vali_f, vali_o = vali["flow"], vali["occupancy"] 
    vali_f_m, vali_o_m = vali["flow_mask"].to(device), vali["occupancy_mask"].to(device) 
    test_f, test_o = test["flow"], test["occupancy"] 
    test_f_m, test_o_m = test["flow_mask"].to(device), test["occupancy_mask"].to(device) 
    
    l_r, n_e = hyper["l_r"], hyper["n_e"]
    opt = optim.Adam(model.parameters(), l_r, betas = (0.9,0.999), weight_decay=0.0001)
    # Single LR drop (by the scheduler's default factor 0.1) at epoch 150.
    opt_scheduler = torch.optim.lr_scheduler.MultiStepLR(opt, milestones=[150])
    
    print ("# epochs ", n_e)
    r_vali_f_mae, r_vali_o_mae, r_test_f_mae, r_test_o_mae = list(), list(), list(), list()
    r_vali_f_rmse, r_vali_o_rmse, r_test_f_rmse, r_test_o_rmse = list(), list(), list(), list()
    
    flow_std_squ = np.power(f_o_mean_std[1], 2)
    
    no_decrease = 0
    for i in range(n_e):
        print ("----------------an epoch starts-------------------")
        #time1_s = time.time()
        
        time_s = time.time()
        print ("i_epoch: ", i)
        n_train = len(train["flow"])
        number_list = copy.copy(list(range(n_train)))
        # Reproducible shuffle via the seeded module-level `r`.
        # NOTE(review): the `random=` parameter of random.shuffle was removed
        # in Python 3.11 — this call breaks there; confirm the target version.
        random.shuffle(number_list, random = r)
        shuffle_idx = torch.tensor(number_list)
        train_x_f, train_y_f = train["flow_mask"][shuffle_idx], train["flow"][shuffle_idx]
        train_x_o, train_y_o = train["occupancy_mask"][shuffle_idx], train["occupancy"][shuffle_idx] 
        
        
        aver_loss, model, aver_p_loss, aver_f_loss, aver_o_loss =\
            train_epoch(model, opt, criterion, train_x_f.to(device), train_y_f,\
                        train_x_o.to(device), train_y_o, hyper, flow_std_squ)
        opt_scheduler.step()
        
        total_phy_flow_occ_loss.append([aver_loss, aver_p_loss, aver_f_loss, aver_o_loss])
        print ("train loss for this epoch: ", round(aver_loss, 6))
        
        #evaluate
        b_s_vt = hyper["b_s_vt"]
        vali_f_mae, vali_f_rmse, vali_o_mae, vali_o_rmse =\
            evaluate(model, vali_f, vali_o, vali_f_m, vali_o_m, f_o_mean_std, b_s_vt)
        test_f_mae, test_f_rmse, test_o_mae, test_o_rmse =\
            evaluate(model, test_f, test_o, test_f_m, test_o_m, f_o_mean_std, b_s_vt)  
        
        r_vali_f_mae.append(vali_f_mae)
        r_test_f_mae.append(test_f_mae)
        r_vali_o_mae.append(vali_o_mae)
        r_test_o_mae.append(test_o_mae)
        r_vali_f_rmse.append(vali_f_rmse)
        r_test_f_rmse.append(test_f_rmse)
        r_vali_o_rmse.append(vali_o_rmse)
        r_test_o_rmse.append(test_o_rmse)
        
        visualize_train_loss(total_phy_flow_occ_loss)
        visualize_flow_loss(r_vali_f_mae, r_test_f_mae)
        visualize_occ_loss(r_vali_o_mae, r_test_o_mae)
        time_e = time.time()
        print ("time for this epoch", time_e - time_s)
        
        # Persist all metrics every epoch so progress survives interruption.
        performance = {"train": total_phy_flow_occ_loss,\
                  "vali": [r_vali_f_mae, r_vali_f_rmse, r_vali_o_mae, r_vali_o_rmse],\
                  "test": [r_test_f_mae, r_test_f_rmse, r_test_o_mae, r_test_o_rmse]}
        subfile =  open(file_name + '/' + 'performance'+'.json','w')
        json.dump(performance, subfile)
        subfile.close()
        
        #early stop
        # Scale both validation RMSEs back to the z-score scale so the flow
        # and occupancy terms are comparable before summing.
        flow_std, occ_std = f_o_mean_std[1], f_o_mean_std[3]
        norm_f_rmse, norm_o_rmse = vali_f_rmse/flow_std, vali_o_rmse/occ_std
        norm_sum_mse = norm_f_rmse*norm_f_rmse + norm_o_rmse*norm_o_rmse
        
        # Track the minimum validation score seen so far; the current epoch is
        # included in the min, so no_decrease only grows while the current
        # score is strictly worse than the best one.
        if n_mse_flow_occ > 0:
            min_until_now = min([min_until_now, norm_sum_mse])
        else:
            min_until_now = 1000000.0  
        if norm_sum_mse > min_until_now:
            no_decrease = no_decrease+1
        else:
            no_decrease = 0
        if no_decrease == max_no_decrease:
            print ("Early stop at the " + str(i+1) + "-th epoch")
            return total_phy_flow_occ_loss, model 
        n_mse_flow_occ = n_mse_flow_occ + 1
        
        print ("No_decrease: ", no_decrease)
    return total_phy_flow_occ_loss, model    

5: prepare tensors¶

In [8]:
def tensorize(train_vali_test):
    """Convert the four flow/occupancy lists of one data split into tensors.

    Returns a new dict with the same keys, each value wrapped by torch.tensor.
    """
    keys = ("flow", "flow_mask", "occupancy", "occupancy_mask")
    return {key: torch.tensor(train_vali_test[key]) for key in keys}

def normalize_flow_occ(tvt, f_o_mean_std):  #tvt: train, vali, test
    """Z-score normalize flow and occupancy (and their masked variants).

    tvt: one split dict; f_o_mean_std = [f_mean, f_std, o_mean, o_std].
    Mutates tvt in place (values converted back to nested lists) and
    returns it.
    """
    f_mean, f_std = f_o_mean_std[0], f_o_mean_std[1]
    o_mean, o_std = f_o_mean_std[2], f_o_mean_std[3]
    for key, mean, std in (("flow_mask", f_mean, f_std),
                           ("flow", f_mean, f_std),
                           ("occupancy_mask", o_mean, o_std),
                           ("occupancy", o_mean, o_std)):
        tvt[key] = ((np.array(tvt[key]) - mean) / std).tolist()
    return tvt

def transform_distance(d_matrix):
    """Map raw pairwise distances to Gaussian-kernel weights, in place.

    Each entry d becomes exp(-10000 * d^2 / sigma^2), where sigma is the std
    of the whole matrix. The 10000 factor presumably rescales the distance
    units — TODO confirm against the data-prep step. Returns the mutated
    input matrix.
    """
    sigma = np.std(d_matrix)
    sigma_square = sigma * sigma
    for row in d_matrix:
        for j, d in enumerate(row):
            row[j] = np.exp(0.0 - 10000.0 * d * d / sigma_square)
    return d_matrix

def load_data(train_path, vali_path, test_path, sensor_adj_path, mean_std_path, sensor_id_path):
    """Load the train/vali/test JSON splits, normalize and tensorize them, and
    build the Gaussian-kernel adjacency tensor on the global `device`.

    Returns (train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length).
    Fix: all files are now opened with context managers so their handles are
    closed deterministically (the original's bare json.load(open(...)) calls
    leaked file descriptors).
    """
    with open(mean_std_path) as fh:
        mean_std = json.load(fh)
    f_o_mean_std = [mean_std["f_mean"], mean_std["f_std"],
                    mean_std["o_mean"], mean_std["o_std"]]

    with open(train_path) as fh:
        train = json.load(fh)
    with open(vali_path) as fh:
        vali = json.load(fh)
    with open(test_path) as fh:
        test = json.load(fh)
    with open(sensor_adj_path) as fh:
        adj = json.load(fh)["adj"]
    n_sensor = len(train["flow"][0])

    train = tensorize(normalize_flow_occ(train, f_o_mean_std))
    vali = tensorize(normalize_flow_occ(vali, f_o_mean_std))
    test = tensorize(normalize_flow_occ(test, f_o_mean_std))

    adj = torch.tensor(transform_distance(adj), device=device).float()

    with open(sensor_id_path) as fh:
        df_sensor_id = json.load(fh)
    # df_sensor_id maps sensor name -> list; index [0] looks like the sensor's
    # row order and [3] its length — TODO confirm against the data-prep step.
    sensor_length = [0.0 for _ in range(n_sensor)]
    for sensor in df_sensor_id:
        sensor_length[df_sensor_id[sensor][0]] = df_sensor_id[sensor][3]

    return train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length

6: main¶

In [9]:
#6.1 load the data
time1 = time.time()
train, vali, test, adj, n_sensor, f_o_mean_std, sensor_length =\
    load_data(train_path, vali_path, test_path, sensor_adj_path, mean_std_path, sensor_id_path)
time2 = time.time()
print (time2-time1)  # seconds spent loading + normalizing the three splits
17.001744508743286
In [10]:
# Sanity-check the split sizes and the normalization statistics
# ([f_mean, f_std, o_mean, o_std]).
print (len(train["flow"]))
print (len(vali["flow"]))
print (len(test["flow"]))
print (f_o_mean_std)
1997
653
653
[241.21586152814126, 220.92336003653475, 0.13805152810287494, 0.1920120065038222]
In [11]:
# Build the physics-informed GNN on `device`; a single MSE criterion is
# reused for the physics, flow, and occupancy loss terms.
model = NMFD_GNN(n_sensor, M, hyper_model, f_o_mean_std, sensor_length, adj).to(device)   
cri = nn.MSELoss() 
In [12]:
#6.2: train the model
# Runs up to num_epochs epochs with early stopping (patience max_no_decrease);
# per-epoch metrics/figures are written under the file_name directory.
total_phy_flow_occ_loss, trained_model = train_process(model, cri, train, vali, test, hyper, f_o_mean_std)
# epochs  200
----------------an epoch starts-------------------
i_epoch:  0
# batch:  124
i_batch:  0.0
the loss for this batch:  1.8325729
flow loss 0.89621675
occ loss 0.82187146
time for this batch 0.6170341968536377
----------------------------------
i_batch:  64.0
the loss for this batch:  0.6144815
flow loss 0.18863225
occ loss 0.29351518
time for this batch 0.388866662979126
----------------------------------
train loss for this epoch:  0.739432
time for this epoch 59.01241898536682
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  1
# batch:  124
i_batch:  0.0
the loss for this batch:  0.5885273
flow loss 0.16203003
occ loss 0.2797379
time for this batch 0.33501148223876953
----------------------------------
i_batch:  64.0
the loss for this batch:  0.6608486
flow loss 0.16983798
occ loss 0.3286397
time for this batch 0.4064362049102783
----------------------------------
train loss for this epoch:  0.523507
time for this epoch 58.876606702804565
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  2
# batch:  124
i_batch:  0.0
the loss for this batch:  0.40579578
flow loss 0.10739661
occ loss 0.18676661
time for this batch 0.3394155502319336
----------------------------------
i_batch:  64.0
the loss for this batch:  0.47206962
flow loss 0.11769441
occ loss 0.21992302
time for this batch 0.3991215229034424
----------------------------------
train loss for this epoch:  0.480317
time for this epoch 58.40217399597168
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  3
# batch:  124
i_batch:  0.0
the loss for this batch:  0.47260636
flow loss 0.108101495
occ loss 0.23388283
time for this batch 0.3452877998352051
----------------------------------
i_batch:  64.0
the loss for this batch:  0.4711466
flow loss 0.112871625
occ loss 0.24779749
time for this batch 0.39765119552612305
----------------------------------
train loss for this epoch:  0.461577
time for this epoch 61.901811838150024
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  4
# batch:  124
i_batch:  0.0
the loss for this batch:  0.5227045
flow loss 0.11400799
occ loss 0.26134926
time for this batch 0.36124539375305176
----------------------------------
i_batch:  64.0
the loss for this batch:  0.43114558
flow loss 0.10096531
occ loss 0.21393514
time for this batch 0.43086886405944824
----------------------------------
train loss for this epoch:  0.446998
time for this epoch 62.68555212020874
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  5
# batch:  124
i_batch:  0.0
the loss for this batch:  0.46980047
flow loss 0.11033853
occ loss 0.22218437
time for this batch 0.33418726921081543
----------------------------------
i_batch:  64.0
the loss for this batch:  0.29374596
flow loss 0.07701626
occ loss 0.13169199
time for this batch 0.39302539825439453
----------------------------------
train loss for this epoch:  0.44038
time for this epoch 65.22787022590637
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  6
# batch:  124
i_batch:  0.0
the loss for this batch:  0.39930427
flow loss 0.104254514
occ loss 0.17226274
time for this batch 0.539492130279541
----------------------------------
i_batch:  64.0
the loss for this batch:  0.34984085
flow loss 0.0845439
occ loss 0.15992607
time for this batch 0.5715556144714355
----------------------------------
train loss for this epoch:  0.432499
time for this epoch 85.93762183189392
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  7
# batch:  124
i_batch:  0.0
the loss for this batch:  0.32232672
flow loss 0.08286349
occ loss 0.1614111
time for this batch 0.5128185749053955
----------------------------------
i_batch:  64.0
the loss for this batch:  0.44597045
flow loss 0.09445828
occ loss 0.21264985
time for this batch 0.5655925273895264
----------------------------------
train loss for this epoch:  0.427074
time for this epoch 84.52183818817139
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  8
# batch:  124
i_batch:  0.0
the loss for this batch:  0.49011254
flow loss 0.111543305
occ loss 0.23736158
time for this batch 0.5219006538391113
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3528722
flow loss 0.082674995
occ loss 0.16454522
time for this batch 0.37086963653564453
----------------------------------
train loss for this epoch:  0.421685
time for this epoch 84.35181403160095
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  9
# batch:  124
i_batch:  0.0
the loss for this batch:  0.5373396
flow loss 0.10806548
occ loss 0.27205122
time for this batch 0.5342507362365723
----------------------------------
i_batch:  64.0
the loss for this batch:  0.5140677
flow loss 0.10004824
occ loss 0.24890625
time for this batch 0.570458173751831
----------------------------------
train loss for this epoch:  0.416269
time for this epoch 84.5655164718628
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  10
# batch:  124
i_batch:  0.0
the loss for this batch:  0.32526016
flow loss 0.07304287
occ loss 0.14621791
time for this batch 0.5117051601409912
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3503657
flow loss 0.07745482
occ loss 0.15763476
time for this batch 0.5650959014892578
----------------------------------
train loss for this epoch:  0.413319
time for this epoch 84.3258159160614
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  11
# batch:  124
i_batch:  0.0
the loss for this batch:  0.43246758
flow loss 0.09228602
occ loss 0.19742222
time for this batch 0.5242660045623779
----------------------------------
i_batch:  64.0
the loss for this batch:  0.29098523
flow loss 0.070844136
occ loss 0.12781923
time for this batch 0.5532581806182861
----------------------------------
train loss for this epoch:  0.408864
time for this epoch 85.17666244506836
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  12
# batch:  124
i_batch:  0.0
the loss for this batch:  0.39346403
flow loss 0.09602988
occ loss 0.17933083
time for this batch 0.5022192001342773
----------------------------------
i_batch:  64.0
the loss for this batch:  0.500859
flow loss 0.1068501
occ loss 0.23778689
time for this batch 0.5596976280212402
----------------------------------
train loss for this epoch:  0.406306
time for this epoch 84.55423760414124
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  13
# batch:  124
i_batch:  0.0
the loss for this batch:  0.3682206
flow loss 0.100955814
occ loss 0.15884253
time for this batch 0.47971510887145996
----------------------------------
i_batch:  64.0
the loss for this batch:  0.45847112
flow loss 0.09882657
occ loss 0.21572395
time for this batch 0.5571267604827881
----------------------------------
train loss for this epoch:  0.404031
time for this epoch 83.81766867637634
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  14
# batch:  124
i_batch:  0.0
the loss for this batch:  0.44410497
flow loss 0.10140443
occ loss 0.20388505
time for this batch 0.5098800659179688
----------------------------------
i_batch:  64.0
the loss for this batch:  0.4862147
flow loss 0.10731892
occ loss 0.23490888
time for this batch 0.45563483238220215
----------------------------------
train loss for this epoch:  0.399447
time for this epoch 86.1754002571106
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  15
# batch:  124
i_batch:  0.0
the loss for this batch:  0.44450736
flow loss 0.09572962
occ loss 0.21095005
time for this batch 0.5807864665985107
----------------------------------
i_batch:  64.0
the loss for this batch:  0.41661286
flow loss 0.10194375
occ loss 0.18980454
time for this batch 0.586920976638794
----------------------------------
train loss for this epoch:  0.39893
time for this epoch 86.47107100486755
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  16
# batch:  124
i_batch:  0.0
the loss for this batch:  0.42428005
flow loss 0.0932424
occ loss 0.19155395
time for this batch 0.5133030414581299
----------------------------------
i_batch:  64.0
the loss for this batch:  0.34306848
flow loss 0.08060854
occ loss 0.15214866
time for this batch 0.5533773899078369
----------------------------------
train loss for this epoch:  0.396668
time for this epoch 84.16269207000732
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  17
# batch:  124
i_batch:  0.0
the loss for this batch:  0.3181974
flow loss 0.07676232
occ loss 0.1414236
time for this batch 0.41672539710998535
----------------------------------
i_batch:  64.0
the loss for this batch:  0.34888884
flow loss 0.07222549
occ loss 0.16495839
time for this batch 0.5787758827209473
----------------------------------
train loss for this epoch:  0.394122
time for this epoch 84.96643877029419
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  18
# batch:  124
i_batch:  0.0
the loss for this batch:  0.41504002
flow loss 0.08770894
occ loss 0.18664671
time for this batch 0.4809439182281494
----------------------------------
i_batch:  64.0
the loss for this batch:  0.35824844
flow loss 0.082396224
occ loss 0.15827265
time for this batch 0.5580668449401855
----------------------------------
train loss for this epoch:  0.393409
time for this epoch 84.41791844367981
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  19
# batch:  124
i_batch:  0.0
the loss for this batch:  0.31489667
flow loss 0.07089513
occ loss 0.15039134
time for this batch 0.5127110481262207
----------------------------------
i_batch:  64.0
the loss for this batch:  0.35490188
flow loss 0.08206008
occ loss 0.17251581
time for this batch 0.4067728519439697
----------------------------------
train loss for this epoch:  0.392643
time for this epoch 84.15514397621155
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  20
# batch:  124
i_batch:  0.0
the loss for this batch:  0.3894797
flow loss 0.07821989
occ loss 0.19263938
time for this batch 0.3826901912689209
----------------------------------
i_batch:  64.0
the loss for this batch:  0.33755466
flow loss 0.075437605
occ loss 0.15494226
time for this batch 0.5490522384643555
----------------------------------
train loss for this epoch:  0.390418
time for this epoch 83.51794743537903
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  21
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2579735
flow loss 0.06574297
occ loss 0.10903427
time for this batch 0.4808681011199951
----------------------------------
i_batch:  64.0
the loss for this batch:  0.43986455
flow loss 0.101587415
occ loss 0.20239575
time for this batch 0.5549705028533936
----------------------------------
train loss for this epoch:  0.38925
time for this epoch 82.84142088890076
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  22
# batch:  124
i_batch:  0.0
the loss for this batch:  0.51861
flow loss 0.112932414
occ loss 0.2591961
time for this batch 0.4987154006958008
----------------------------------
i_batch:  64.0
the loss for this batch:  0.4753052
flow loss 0.10329475
occ loss 0.21647339
time for this batch 0.5435469150543213
----------------------------------
train loss for this epoch:  0.388856
time for this epoch 83.77294683456421
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  23
# batch:  124
i_batch:  0.0
the loss for this batch:  0.4199681
flow loss 0.08401766
occ loss 0.1943481
time for this batch 0.47499608993530273
----------------------------------
i_batch:  64.0
the loss for this batch:  0.33947644
flow loss 0.071929455
occ loss 0.1471746
time for this batch 0.39894986152648926
----------------------------------
train loss for this epoch:  0.386442
time for this epoch 82.44995784759521
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  24
# batch:  124
i_batch:  0.0
the loss for this batch:  0.39408126
flow loss 0.091057554
occ loss 0.175594
time for this batch 0.5125844478607178
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3134563
flow loss 0.07840421
occ loss 0.1369148
time for this batch 0.5084109306335449
----------------------------------
train loss for this epoch:  0.385294
time for this epoch 83.78894543647766
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  25
# batch:  124
i_batch:  0.0
the loss for this batch:  0.3678293
flow loss 0.081824124
occ loss 0.17140347
time for this batch 0.5250282287597656
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3917352
flow loss 0.08160328
occ loss 0.19489354
time for this batch 0.5664491653442383
----------------------------------
train loss for this epoch:  0.38407
time for this epoch 84.4245126247406
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  26
# batch:  124
i_batch:  0.0
the loss for this batch:  0.5716673
flow loss 0.11565768
occ loss 0.27165332
time for this batch 0.48095226287841797
----------------------------------
i_batch:  64.0
the loss for this batch:  0.47119576
flow loss 0.10373575
occ loss 0.2170749
time for this batch 0.5616791248321533
----------------------------------
train loss for this epoch:  0.384619
time for this epoch 83.25258421897888
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  27
# batch:  124
i_batch:  0.0
the loss for this batch:  0.3162808
flow loss 0.0699872
occ loss 0.15703647
time for this batch 0.4985816478729248
----------------------------------
i_batch:  64.0
the loss for this batch:  0.42911828
flow loss 0.0930712
occ loss 0.19705509
time for this batch 0.5679404735565186
----------------------------------
train loss for this epoch:  0.383635
time for this epoch 83.23864364624023
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  28
# batch:  124
i_batch:  0.0
the loss for this batch:  0.38434437
flow loss 0.09029838
occ loss 0.17182669
time for this batch 0.5349509716033936
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3899114
flow loss 0.08707689
occ loss 0.17943142
time for this batch 0.5657718181610107
----------------------------------
train loss for this epoch:  0.381834
time for this epoch 83.4757649898529
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  29
# batch:  124
i_batch:  0.0
the loss for this batch:  0.36776114
flow loss 0.07628368
occ loss 0.1628206
time for this batch 0.5165677070617676
----------------------------------
i_batch:  64.0
the loss for this batch:  0.5190271
flow loss 0.10248567
occ loss 0.24143887
time for this batch 0.5729541778564453
----------------------------------
train loss for this epoch:  0.381462
time for this epoch 84.22597455978394
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  30
# batch:  124
i_batch:  0.0
the loss for this batch:  0.51092005
flow loss 0.10538039
occ loss 0.2334836
time for this batch 0.49190521240234375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.44246995
flow loss 0.08554385
occ loss 0.19705969
time for this batch 0.5325868129730225
----------------------------------
train loss for this epoch:  0.382628
time for this epoch 84.55201411247253
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  31
# batch:  124
i_batch:  0.0
the loss for this batch:  0.48814407
flow loss 0.09609854
occ loss 0.22412731
time for this batch 0.33892202377319336
----------------------------------
i_batch:  64.0
the loss for this batch:  0.4033553
flow loss 0.07840918
occ loss 0.1887489
time for this batch 0.5500345230102539
----------------------------------
train loss for this epoch:  0.379548
time for this epoch 83.25360655784607
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  32
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2933299
flow loss 0.063253365
occ loss 0.14578676
time for this batch 0.47564053535461426
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3569315
flow loss 0.0796314
occ loss 0.15727688
time for this batch 0.567439079284668
----------------------------------
train loss for this epoch:  0.377805
time for this epoch 84.28117275238037
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  33
# batch:  124
i_batch:  0.0
the loss for this batch:  0.3852728
flow loss 0.0824926
occ loss 0.175282
time for this batch 0.5156998634338379
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3363636
flow loss 0.07496626
occ loss 0.1566548
time for this batch 0.5368843078613281
----------------------------------
train loss for this epoch:  0.375043
time for this epoch 84.54397678375244
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  34
# batch:  124
i_batch:  0.0
the loss for this batch:  0.40059084
flow loss 0.09630689
occ loss 0.18995029
time for this batch 0.507075309753418
----------------------------------
i_batch:  64.0
the loss for this batch:  0.39627603
flow loss 0.08729494
occ loss 0.18147966
time for this batch 0.5647118091583252
----------------------------------
train loss for this epoch:  0.373895
time for this epoch 84.09545016288757
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  35
# batch:  124
i_batch:  0.0
the loss for this batch:  0.4362095
flow loss 0.09327476
occ loss 0.19519357
time for this batch 0.503746509552002
----------------------------------
i_batch:  64.0
the loss for this batch:  0.34782556
flow loss 0.08128125
occ loss 0.15613922
time for this batch 0.6411550045013428
----------------------------------
train loss for this epoch:  0.366385
time for this epoch 83.97909331321716
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  36
# batch:  124
i_batch:  0.0
the loss for this batch:  0.35377902
flow loss 0.074001566
occ loss 0.1737738
time for this batch 0.5049054622650146
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3638198
flow loss 0.08899392
occ loss 0.17047034
time for this batch 0.5478289127349854
----------------------------------
train loss for this epoch:  0.35537
time for this epoch 84.82249665260315
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  37
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2717862
flow loss 0.06241016
occ loss 0.1281517
time for this batch 0.4116847515106201
----------------------------------
i_batch:  64.0
the loss for this batch:  0.30370462
flow loss 0.072770216
occ loss 0.1512541
time for this batch 0.553152322769165
----------------------------------
train loss for this epoch:  0.33947
time for this epoch 83.41134905815125
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  38
# batch:  124
i_batch:  0.0
the loss for this batch:  0.45990765
flow loss 0.110276595
occ loss 0.24727742
time for this batch 0.4936387538909912
----------------------------------
i_batch:  64.0
the loss for this batch:  0.3960239
flow loss 0.08552338
occ loss 0.21235473
time for this batch 0.5649080276489258
----------------------------------
train loss for this epoch:  0.317821
time for this epoch 85.34671521186829
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  39
# batch:  124
i_batch:  0.0
the loss for this batch:  0.3279369
flow loss 0.075558096
occ loss 0.1817833
time for this batch 0.3581991195678711
----------------------------------
i_batch:  64.0
the loss for this batch:  0.4023878
flow loss 0.09011148
occ loss 0.24514672
time for this batch 0.5606174468994141
----------------------------------
train loss for this epoch:  0.294768
time for this epoch 85.07291555404663
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  40
# batch:  124
i_batch:  0.0
the loss for this batch:  0.31489816
flow loss 0.08561872
occ loss 0.18720895
time for this batch 0.4771435260772705
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20005436
flow loss 0.054871947
occ loss 0.121733874
time for this batch 0.5756595134735107
----------------------------------
train loss for this epoch:  0.270046
time for this epoch 87.92265939712524
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  41
# batch:  124
i_batch:  0.0
the loss for this batch:  0.22996388
flow loss 0.061454426
occ loss 0.15236445
time for this batch 0.48435378074645996
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23080909
flow loss 0.06345322
occ loss 0.15331171
time for this batch 0.568213701248169
----------------------------------
train loss for this epoch:  0.249163
time for this epoch 85.31990790367126
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  42
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19678243
flow loss 0.053055037
occ loss 0.13638891
time for this batch 0.4614284038543701
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2730554
flow loss 0.069754176
occ loss 0.19762605
time for this batch 0.576434850692749
----------------------------------
train loss for this epoch:  0.238929
time for this epoch 83.56304287910461
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  43
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2485679
flow loss 0.07258584
occ loss 0.1732784
time for this batch 0.46524858474731445
----------------------------------
i_batch:  64.0
the loss for this batch:  0.33229578
flow loss 0.08634651
occ loss 0.2438268
time for this batch 0.5537283420562744
----------------------------------
train loss for this epoch:  0.233794
time for this epoch 84.98172879219055
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  44
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19438767
flow loss 0.061165266
occ loss 0.1322545
time for this batch 0.49664306640625
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2545915
flow loss 0.06776179
occ loss 0.18571256
time for this batch 0.55104660987854
----------------------------------
train loss for this epoch:  0.232738
time for this epoch 85.01822066307068
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  45
# batch:  124
i_batch:  0.0
the loss for this batch:  0.32003617
flow loss 0.07729222
occ loss 0.24185947
time for this batch 0.35245275497436523
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19148402
flow loss 0.052372742
occ loss 0.1384043
time for this batch 0.5287127494812012
----------------------------------
train loss for this epoch:  0.231701
time for this epoch 83.52418160438538
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  46
# batch:  124
i_batch:  0.0
the loss for this batch:  0.22095464
flow loss 0.062012207
occ loss 0.15849857
time for this batch 0.5171504020690918
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22482507
flow loss 0.061277367
occ loss 0.16303338
time for this batch 0.5722324848175049
----------------------------------
train loss for this epoch:  0.231816
time for this epoch 85.73759365081787
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  47
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19347773
flow loss 0.054576717
occ loss 0.13829875
time for this batch 0.5326368808746338
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20425886
flow loss 0.06163582
occ loss 0.14169873
time for this batch 0.5738112926483154
----------------------------------
train loss for this epoch:  0.232196
time for this epoch 82.97716498374939
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  48
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20287435
flow loss 0.05837168
occ loss 0.1440246
time for this batch 0.49195075035095215
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26370102
flow loss 0.06942228
occ loss 0.19380766
time for this batch 0.5384156703948975
----------------------------------
train loss for this epoch:  0.229297
time for this epoch 84.13713479042053
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  49
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24488224
flow loss 0.066502795
occ loss 0.1776626
time for this batch 0.4844374656677246
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24603571
flow loss 0.069106765
occ loss 0.17635241
time for this batch 0.5628104209899902
----------------------------------
train loss for this epoch:  0.230441
time for this epoch 84.25959134101868
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  50
# batch:  124
i_batch:  0.0
the loss for this batch:  0.29809016
flow loss 0.07260583
occ loss 0.22488959
time for this batch 0.5253608226776123
----------------------------------
i_batch:  64.0
the loss for this batch:  0.31168753
flow loss 0.07345627
occ loss 0.23765741
time for this batch 0.5810210704803467
----------------------------------
train loss for this epoch:  0.229593
time for this epoch 84.30539131164551
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  51
# batch:  124
i_batch:  0.0
the loss for this batch:  0.22591785
flow loss 0.06705319
occ loss 0.15846011
time for this batch 0.4254636764526367
----------------------------------
i_batch:  64.0
the loss for this batch:  0.28221527
flow loss 0.07548429
occ loss 0.20627482
time for this batch 0.5579097270965576
----------------------------------
train loss for this epoch:  0.227954
time for this epoch 83.51873421669006
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  52
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20371328
flow loss 0.05557267
occ loss 0.14776838
time for this batch 0.5053398609161377
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14228614
flow loss 0.046353057
occ loss 0.095618926
time for this batch 0.548720121383667
----------------------------------
train loss for this epoch:  0.227232
time for this epoch 83.99161028862
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  53
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24266607
flow loss 0.062794745
occ loss 0.1794445
time for this batch 0.5113322734832764
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24234538
flow loss 0.063731514
occ loss 0.1779572
time for this batch 0.5639169216156006
----------------------------------
train loss for this epoch:  0.227456
time for this epoch 83.66939616203308
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  54
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23562923
flow loss 0.06350872
occ loss 0.1713962
time for this batch 0.4445936679840088
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16878253
flow loss 0.05182514
occ loss 0.116238326
time for this batch 0.5180585384368896
----------------------------------
train loss for this epoch:  0.228002
time for this epoch 82.69022464752197
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  55
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24194439
flow loss 0.06149433
occ loss 0.18000442
time for this batch 0.4951012134552002
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2099241
flow loss 0.055741545
occ loss 0.15328671
time for this batch 0.5753951072692871
----------------------------------
train loss for this epoch:  0.227324
time for this epoch 85.69332313537598
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  56
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24684736
flow loss 0.064126045
occ loss 0.18240279
time for this batch 0.5245950222015381
----------------------------------
i_batch:  64.0
the loss for this batch:  0.29252824
flow loss 0.07480028
occ loss 0.21710427
time for this batch 0.5582888126373291
----------------------------------
train loss for this epoch:  0.22757
time for this epoch 83.75726699829102
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  57
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2244146
flow loss 0.061590433
occ loss 0.16236547
time for this batch 0.46871280670166016
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17647523
flow loss 0.050862443
occ loss 0.12519999
time for this batch 0.5159375667572021
----------------------------------
train loss for this epoch:  0.226626
time for this epoch 83.30871868133545
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  58
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18432926
flow loss 0.052428693
occ loss 0.13126166
time for this batch 0.48993515968322754
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2521375
flow loss 0.06600719
occ loss 0.1851895
time for this batch 0.5611279010772705
----------------------------------
train loss for this epoch:  0.22697
time for this epoch 84.04945015907288
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  59
# batch:  124
i_batch:  0.0
the loss for this batch:  0.17502739
flow loss 0.05348824
occ loss 0.121165596
time for this batch 0.5169558525085449
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2283923
flow loss 0.06031867
occ loss 0.16707936
time for this batch 0.5610418319702148
----------------------------------
train loss for this epoch:  0.225742
time for this epoch 83.71819829940796
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  60
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2612831
flow loss 0.06844395
occ loss 0.19236594
time for this batch 0.5125010013580322
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22618423
flow loss 0.06022175
occ loss 0.16525392
time for this batch 0.5704493522644043
----------------------------------
train loss for this epoch:  0.224925
time for this epoch 84.75423264503479
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  61
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20907754
flow loss 0.06040661
occ loss 0.14803141
time for this batch 0.5010745525360107
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16686653
flow loss 0.046193667
occ loss 0.11961773
time for this batch 0.573613166809082
----------------------------------
train loss for this epoch:  0.228046
time for this epoch 87.14471745491028
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  62
# batch:  124
i_batch:  0.0
the loss for this batch:  0.15058775
flow loss 0.048304014
occ loss 0.10188859
time for this batch 0.49124789237976074
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25970793
flow loss 0.0677334
occ loss 0.19068411
time for this batch 0.5468811988830566
----------------------------------
train loss for this epoch:  0.224923
time for this epoch 84.79379606246948
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  63
# batch:  124
i_batch:  0.0
the loss for this batch:  0.238081
flow loss 0.068138726
occ loss 0.16941439
time for this batch 0.4865896701812744
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20004247
flow loss 0.052348297
occ loss 0.14729318
time for this batch 0.43762898445129395
----------------------------------
train loss for this epoch:  0.224857
time for this epoch 83.92381358146667
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  64
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2232662
flow loss 0.060374595
occ loss 0.16224086
time for this batch 0.4583771228790283
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22585294
flow loss 0.0593941
occ loss 0.16591382
time for this batch 0.524878978729248
----------------------------------
train loss for this epoch:  0.226221
time for this epoch 83.49435949325562
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  65
# batch:  124
i_batch:  0.0
the loss for this batch:  0.1997342
flow loss 0.056873366
occ loss 0.14241442
time for this batch 0.4558401107788086
----------------------------------
i_batch:  64.0
the loss for this batch:  0.09245802
flow loss 0.030407967
occ loss 0.06093147
time for this batch 0.5301706790924072
----------------------------------
train loss for this epoch:  0.22364
time for this epoch 84.71480083465576
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  66
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23523402
flow loss 0.06540971
occ loss 0.16892712
time for this batch 0.5254650115966797
----------------------------------
i_batch:  64.0
the loss for this batch:  0.248395
flow loss 0.06461511
occ loss 0.18341507
time for this batch 0.5514125823974609
----------------------------------
train loss for this epoch:  0.223201
time for this epoch 83.52707767486572
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  67
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23560447
flow loss 0.06596967
occ loss 0.16931356
time for this batch 0.4893653392791748
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21563745
flow loss 0.05808709
occ loss 0.15658374
time for this batch 0.5511577129364014
----------------------------------
train loss for this epoch:  0.223417
time for this epoch 84.43798851966858
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  68
# batch:  124
i_batch:  0.0
the loss for this batch:  0.17057884
flow loss 0.05071873
occ loss 0.11947536
time for this batch 0.46851134300231934
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17558567
flow loss 0.047760386
occ loss 0.1274634
time for this batch 0.5859391689300537
----------------------------------
train loss for this epoch:  0.222085
time for this epoch 84.00654625892639
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  69
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24068995
flow loss 0.062557764
occ loss 0.17756632
time for this batch 0.4371509552001953
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24063921
flow loss 0.06901637
occ loss 0.17110391
time for this batch 0.5536627769470215
----------------------------------
train loss for this epoch:  0.222571
time for this epoch 84.74192643165588
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  70
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21558739
flow loss 0.052905805
occ loss 0.1622738
time for this batch 0.4119997024536133
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1985156
flow loss 0.053922687
occ loss 0.14421457
time for this batch 0.4918816089630127
----------------------------------
train loss for this epoch:  0.222036
time for this epoch 81.00452303886414
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  71
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23449889
flow loss 0.062810846
occ loss 0.17128077
time for this batch 0.5333771705627441
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19955376
flow loss 0.05593946
occ loss 0.14325032
time for this batch 0.5648207664489746
----------------------------------
train loss for this epoch:  0.222989
time for this epoch 83.04286789894104
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  72
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25260004
flow loss 0.06339055
occ loss 0.18857889
time for this batch 0.5188643932342529
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2590625
flow loss 0.06494599
occ loss 0.19361646
time for this batch 0.542374849319458
----------------------------------
train loss for this epoch:  0.221685
time for this epoch 84.28779244422913
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  73
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18239285
flow loss 0.04898853
occ loss 0.13250402
time for this batch 0.4933161735534668
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2614807
flow loss 0.07136423
occ loss 0.18952101
time for this batch 0.5702300071716309
----------------------------------
train loss for this epoch:  0.222428
time for this epoch 85.03833436965942
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  74
# batch:  124
i_batch:  0.0
the loss for this batch:  0.22898993
flow loss 0.064025156
occ loss 0.16464588
time for this batch 0.3907203674316406
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2089675
flow loss 0.056500148
occ loss 0.15201274
time for this batch 0.5719122886657715
----------------------------------
train loss for this epoch:  0.222907
time for this epoch 83.37429141998291
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  75
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2456042
flow loss 0.06446991
occ loss 0.18074094
time for this batch 0.4796111583709717
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2383991
flow loss 0.06359564
occ loss 0.1742942
time for this batch 0.5413017272949219
----------------------------------
train loss for this epoch:  0.222221
time for this epoch 84.2179205417633
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  76
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2569832
flow loss 0.06355791
occ loss 0.19229193
time for this batch 0.5133883953094482
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25156468
flow loss 0.06539758
occ loss 0.18576925
time for this batch 0.5559184551239014
----------------------------------
train loss for this epoch:  0.221677
time for this epoch 85.45092391967773
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  77
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2563657
flow loss 0.06912639
occ loss 0.18671554
time for this batch 0.5051229000091553
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23010844
flow loss 0.0646355
occ loss 0.1650719
time for this batch 0.5682172775268555
----------------------------------
train loss for this epoch:  0.219998
time for this epoch 83.61398673057556
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  78
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21131738
flow loss 0.05737553
occ loss 0.1536279
time for this batch 0.5171430110931396
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2099907
flow loss 0.059243385
occ loss 0.15040497
time for this batch 0.5562155246734619
----------------------------------
train loss for this epoch:  0.219755
time for this epoch 83.42787790298462
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  79
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24913286
flow loss 0.062082376
occ loss 0.18661289
time for this batch 0.5053348541259766
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21534455
flow loss 0.05823717
occ loss 0.1566792
time for this batch 0.5463688373565674
----------------------------------
train loss for this epoch:  0.219667
time for this epoch 82.60279655456543
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  80
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2235837
flow loss 0.0624936
occ loss 0.16070282
time for this batch 0.5041463375091553
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26113924
flow loss 0.06760566
occ loss 0.19295248
time for this batch 0.5641059875488281
----------------------------------
train loss for this epoch:  0.220784
time for this epoch 83.0617995262146
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  81
# batch:  124
i_batch:  0.0
the loss for this batch:  0.194152
flow loss 0.058472615
occ loss 0.13456748
time for this batch 0.5097784996032715
----------------------------------
i_batch:  64.0
the loss for this batch:  0.13963069
flow loss 0.044537414
occ loss 0.09487984
time for this batch 0.5528428554534912
----------------------------------
train loss for this epoch:  0.220671
time for this epoch 84.13176727294922
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  82
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2493163
flow loss 0.06279669
occ loss 0.18591711
time for this batch 0.48450350761413574
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20866852
flow loss 0.0594654
occ loss 0.14854684
time for this batch 0.5426416397094727
----------------------------------
train loss for this epoch:  0.220739
time for this epoch 75.91315221786499
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  83
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25970268
flow loss 0.0709593
occ loss 0.18797287
time for this batch 0.3966512680053711
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25711325
flow loss 0.064098544
occ loss 0.19218262
time for this batch 0.37326884269714355
----------------------------------
train loss for this epoch:  0.21967
time for this epoch 58.28587555885315
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  84
# batch:  124
i_batch:  0.0
the loss for this batch:  0.17332663
flow loss 0.048310272
occ loss 0.124510884
time for this batch 0.3415234088897705
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1655101
flow loss 0.048248112
occ loss 0.116654746
time for this batch 0.4513218402862549
----------------------------------
train loss for this epoch:  0.218242
time for this epoch 63.807405948638916
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  85
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2115246
flow loss 0.05801209
occ loss 0.15294796
time for this batch 0.3828566074371338
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2159254
flow loss 0.05469317
occ loss 0.16069154
time for this batch 0.41068220138549805
----------------------------------
train loss for this epoch:  0.219016
time for this epoch 66.1804871559143
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  86
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25585258
flow loss 0.06320879
occ loss 0.19217889
time for this batch 0.34677839279174805
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1438767
flow loss 0.04478986
occ loss 0.09867533
time for this batch 0.40975093841552734
----------------------------------
train loss for this epoch:  0.219002
time for this epoch 64.44793891906738
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  87
# batch:  124
i_batch:  0.0
the loss for this batch:  0.27005622
flow loss 0.06624788
occ loss 0.20329665
time for this batch 0.33176326751708984
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17852001
flow loss 0.05324008
occ loss 0.1241627
time for this batch 0.4304773807525635
----------------------------------
train loss for this epoch:  0.218535
time for this epoch 66.7626793384552
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  88
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24100548
flow loss 0.064057216
occ loss 0.17623314
time for this batch 0.3731048107147217
----------------------------------
i_batch:  64.0
the loss for this batch:  0.27682722
flow loss 0.065135844
occ loss 0.2111589
time for this batch 0.4143791198730469
----------------------------------
train loss for this epoch:  0.217886
time for this epoch 65.19604277610779
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  89
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24924393
flow loss 0.06535388
occ loss 0.18331547
time for this batch 0.37300992012023926
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17869867
flow loss 0.049315237
occ loss 0.12912256
time for this batch 0.42612338066101074
----------------------------------
train loss for this epoch:  0.217505
time for this epoch 63.732266426086426
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  90
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23564234
flow loss 0.06574176
occ loss 0.16917528
time for this batch 0.3598670959472656
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16738579
flow loss 0.049109958
occ loss 0.11710283
time for this batch 0.4193565845489502
----------------------------------
train loss for this epoch:  0.218323
time for this epoch 64.63648843765259
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  91
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2488573
flow loss 0.064949535
occ loss 0.18313944
time for this batch 0.3610410690307617
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25940475
flow loss 0.06806622
occ loss 0.19076364
time for this batch 0.3874030113220215
----------------------------------
train loss for this epoch:  0.217404
time for this epoch 64.59205412864685
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  92
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24365667
flow loss 0.06323199
occ loss 0.17991407
time for this batch 0.3637123107910156
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2591096
flow loss 0.06719441
occ loss 0.19143182
time for this batch 0.384566068649292
----------------------------------
train loss for this epoch:  0.218281
time for this epoch 63.81357216835022
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  93
# batch:  124
i_batch:  0.0
the loss for this batch:  0.15030858
flow loss 0.04397252
occ loss 0.10608012
time for this batch 0.3642585277557373
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23949549
flow loss 0.06476616
occ loss 0.17441328
time for this batch 0.39472031593322754
----------------------------------
train loss for this epoch:  0.217241
time for this epoch 68.19953441619873
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  94
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21005936
flow loss 0.057000864
occ loss 0.1527069
time for this batch 0.38373541831970215
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24292865
flow loss 0.06774516
occ loss 0.17455712
time for this batch 0.34375929832458496
----------------------------------
train loss for this epoch:  0.217591
time for this epoch 66.58888030052185
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  95
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19783774
flow loss 0.05321843
occ loss 0.14421703
time for this batch 0.40873241424560547
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25196713
flow loss 0.06693157
occ loss 0.18446991
time for this batch 0.44325995445251465
----------------------------------
train loss for this epoch:  0.217034
time for this epoch 65.32320761680603
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  96
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24571352
flow loss 0.062460724
occ loss 0.18286
time for this batch 0.3608896732330322
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18070544
flow loss 0.048841663
occ loss 0.13109758
time for this batch 0.45134902000427246
----------------------------------
train loss for this epoch:  0.216379
time for this epoch 64.73498463630676
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  97
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20019133
flow loss 0.054410268
occ loss 0.1455003
time for this batch 0.38420939445495605
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23888063
flow loss 0.060537193
occ loss 0.17778254
time for this batch 0.41312289237976074
----------------------------------
train loss for this epoch:  0.217333
time for this epoch 63.846593618392944
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  98
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2380092
flow loss 0.06025679
occ loss 0.17718326
time for this batch 0.34903812408447266
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14318986
flow loss 0.043290667
occ loss 0.09931365
time for this batch 0.42720746994018555
----------------------------------
train loss for this epoch:  0.217417
time for this epoch 64.64035844802856
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  99
# batch:  124
i_batch:  0.0
the loss for this batch:  0.29479352
flow loss 0.07242925
occ loss 0.22174795
time for this batch 0.3403284549713135
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17862475
flow loss 0.0483349
occ loss 0.12943485
time for this batch 0.44529199600219727
----------------------------------
train loss for this epoch:  0.216041
time for this epoch 64.39432716369629
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  100
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21573491
flow loss 0.056198645
occ loss 0.15868136
time for this batch 0.39453983306884766
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2574591
flow loss 0.06382943
occ loss 0.1931539
time for this batch 0.40993762016296387
----------------------------------
train loss for this epoch:  0.214767
time for this epoch 64.8930995464325
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  101
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2134245
flow loss 0.05830187
occ loss 0.15487315
time for this batch 0.3812541961669922
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21218203
flow loss 0.060551092
occ loss 0.15134591
time for this batch 0.42412424087524414
----------------------------------
train loss for this epoch:  0.216757
time for this epoch 67.26193261146545
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  102
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2045396
flow loss 0.054200795
occ loss 0.14982021
time for this batch 0.3498053550720215
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26640293
flow loss 0.07267667
occ loss 0.19323249
time for this batch 0.42275047302246094
----------------------------------
train loss for this epoch:  0.216787
time for this epoch 62.971516132354736
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  103
# batch:  124
i_batch:  0.0
the loss for this batch:  0.16169177
flow loss 0.0464283
occ loss 0.11476285
time for this batch 0.36420679092407227
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22479574
flow loss 0.057799358
occ loss 0.16623107
time for this batch 0.42071533203125
----------------------------------
train loss for this epoch:  0.215182
time for this epoch 65.72509837150574
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  104
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20773292
flow loss 0.05433523
occ loss 0.15295874
time for this batch 0.2949657440185547
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17399718
flow loss 0.047477346
occ loss 0.12622824
time for this batch 0.45902514457702637
----------------------------------
train loss for this epoch:  0.213683
time for this epoch 62.31619310379028
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  105
# batch:  124
i_batch:  0.0
the loss for this batch:  0.238103
flow loss 0.062256362
occ loss 0.17537323
time for this batch 0.33134961128234863
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15134357
flow loss 0.047013268
occ loss 0.103873245
time for this batch 0.26640963554382324
----------------------------------
train loss for this epoch:  0.214808
time for this epoch 57.200127363204956
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  106
# batch:  124
i_batch:  0.0
the loss for this batch:  0.16489641
flow loss 0.04617712
occ loss 0.11798085
time for this batch 0.31870484352111816
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21705592
flow loss 0.062380318
occ loss 0.15413512
time for this batch 0.26209330558776855
----------------------------------
train loss for this epoch:  0.214462
time for this epoch 41.14203596115112
No_decrease:  10
----------------an epoch starts-------------------
i_epoch:  107
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20263122
flow loss 0.05597266
occ loss 0.14578895
time for this batch 0.2600419521331787
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19281527
flow loss 0.05420233
occ loss 0.1383101
time for this batch 0.25994420051574707
----------------------------------
train loss for this epoch:  0.214594
time for this epoch 41.659284830093384
No_decrease:  11
----------------an epoch starts-------------------
i_epoch:  108
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24151935
flow loss 0.064832576
occ loss 0.17629452
time for this batch 0.3309924602508545
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17401674
flow loss 0.049683288
occ loss 0.12394314
time for this batch 0.3965325355529785
----------------------------------
train loss for this epoch:  0.215671
time for this epoch 58.20651292800903
No_decrease:  12
----------------an epoch starts-------------------
i_epoch:  109
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20750462
flow loss 0.05636002
occ loss 0.15053153
time for this batch 0.3348660469055176
----------------------------------
i_batch:  64.0
the loss for this batch:  0.29331374
flow loss 0.07312722
occ loss 0.21965334
time for this batch 0.4055168628692627
----------------------------------
train loss for this epoch:  0.214796
time for this epoch 56.8858642578125
No_decrease:  13
----------------an epoch starts-------------------
i_epoch:  110
# batch:  124
i_batch:  0.0
the loss for this batch:  0.27448687
flow loss 0.06854689
occ loss 0.20544122
time for this batch 0.3478577136993408
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2272467
flow loss 0.059201792
occ loss 0.16742916
time for this batch 0.3696908950805664
----------------------------------
train loss for this epoch:  0.213343
time for this epoch 58.97466039657593
No_decrease:  14
----------------an epoch starts-------------------
i_epoch:  111
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18690006
flow loss 0.050605986
occ loss 0.13583183
time for this batch 0.32954835891723633
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20575587
flow loss 0.057697132
occ loss 0.14765014
time for this batch 0.33835363388061523
----------------------------------
train loss for this epoch:  0.216044
time for this epoch 58.714296102523804
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  112
# batch:  124
i_batch:  0.0
the loss for this batch:  0.14111437
flow loss 0.037440326
occ loss 0.103454225
time for this batch 0.31122541427612305
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22837189
flow loss 0.059179734
occ loss 0.16875184
time for this batch 0.40096378326416016
----------------------------------
train loss for this epoch:  0.212889
time for this epoch 58.295522928237915
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  113
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24723035
flow loss 0.06261761
occ loss 0.18422268
time for this batch 0.2521324157714844
----------------------------------
i_batch:  64.0
the loss for this batch:  0.16015509
flow loss 0.0471026
occ loss 0.11274724
time for this batch 0.24814581871032715
----------------------------------
train loss for this epoch:  0.214111
time for this epoch 41.10842943191528
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  114
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24414706
flow loss 0.06352549
occ loss 0.18022478
time for this batch 0.34656786918640137
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19031522
flow loss 0.053971242
occ loss 0.13598225
time for this batch 0.2336595058441162
----------------------------------
train loss for this epoch:  0.213667
time for this epoch 44.37557649612427
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  115
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18822406
flow loss 0.056590986
occ loss 0.13124214
time for this batch 0.32941126823425293
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18674666
flow loss 0.054276988
occ loss 0.13207881
time for this batch 0.4092593193054199
----------------------------------
train loss for this epoch:  0.213131
time for this epoch 59.95316767692566
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  116
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2657124
flow loss 0.07082158
occ loss 0.19416536
time for this batch 0.32158589363098145
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1665719
flow loss 0.050768744
occ loss 0.11532974
time for this batch 0.34326982498168945
----------------------------------
train loss for this epoch:  0.214013
time for this epoch 59.2137234210968
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  117
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23465654
flow loss 0.06439703
occ loss 0.1696608
time for this batch 0.3290081024169922
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19336456
flow loss 0.051385626
occ loss 0.14155245
time for this batch 0.3926842212677002
----------------------------------
train loss for this epoch:  0.213361
time for this epoch 57.503268241882324
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  118
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21789037
flow loss 0.055062406
occ loss 0.16250901
time for this batch 0.34218811988830566
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1712883
flow loss 0.05041106
occ loss 0.12045927
time for this batch 0.4058713912963867
----------------------------------
train loss for this epoch:  0.212852
time for this epoch 58.86929368972778
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  119
# batch:  124
i_batch:  0.0
the loss for this batch:  0.17528003
flow loss 0.053530283
occ loss 0.12130909
time for this batch 0.329134464263916
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19667298
flow loss 0.051072173
occ loss 0.14512739
time for this batch 0.37235140800476074
----------------------------------
train loss for this epoch:  0.212132
time for this epoch 56.851621866226196
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  120
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25446582
flow loss 0.06374957
occ loss 0.19032268
time for this batch 0.33365964889526367
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19475009
flow loss 0.050643656
occ loss 0.1436976
time for this batch 0.3743758201599121
----------------------------------
train loss for this epoch:  0.212089
time for this epoch 58.11374640464783
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  121
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2357614
flow loss 0.06359565
occ loss 0.17152534
time for this batch 0.33023571968078613
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26734248
flow loss 0.069599114
occ loss 0.19725154
time for this batch 0.3789551258087158
----------------------------------
train loss for this epoch:  0.212328
time for this epoch 58.01237773895264
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  122
# batch:  124
i_batch:  0.0
the loss for this batch:  0.15853867
flow loss 0.045718953
occ loss 0.11240147
time for this batch 0.3401463031768799
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22221285
flow loss 0.058982007
occ loss 0.16269185
time for this batch 0.3529651165008545
----------------------------------
train loss for this epoch:  0.212162
time for this epoch 57.63170576095581
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  123
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2018946
flow loss 0.05559397
occ loss 0.14579919
time for this batch 0.33118128776550293
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24008688
flow loss 0.064188465
occ loss 0.1755903
time for this batch 0.398700475692749
----------------------------------
train loss for this epoch:  0.212483
time for this epoch 60.74751377105713
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  124
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23245932
flow loss 0.06359074
occ loss 0.16794759
time for this batch 0.35729026794433594
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19910087
flow loss 0.055807125
occ loss 0.14265375
time for this batch 0.42996883392333984
----------------------------------
train loss for this epoch:  0.213169
time for this epoch 63.04020881652832
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  125
# batch:  124
i_batch:  0.0
the loss for this batch:  0.22786051
flow loss 0.058274772
occ loss 0.16921172
time for this batch 0.35199856758117676
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2356522
flow loss 0.060426358
occ loss 0.17442141
time for this batch 0.4379091262817383
----------------------------------
train loss for this epoch:  0.211647
time for this epoch 62.66764259338379
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  126
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20944226
flow loss 0.053410828
occ loss 0.15555628
time for this batch 0.3602118492126465
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17867535
flow loss 0.05169825
occ loss 0.12661529
time for this batch 0.40515661239624023
----------------------------------
train loss for this epoch:  0.210516
time for this epoch 63.08777213096619
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  127
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21900482
flow loss 0.057374638
occ loss 0.16118006
time for this batch 0.3616960048675537
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2811212
flow loss 0.07349326
occ loss 0.20681731
time for this batch 0.4117138385772705
----------------------------------
train loss for this epoch:  0.211648
time for this epoch 61.78906559944153
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  128
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25462466
flow loss 0.06884809
occ loss 0.18537186
time for this batch 0.3575119972229004
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2385649
flow loss 0.06152988
occ loss 0.17660514
time for this batch 0.4320363998413086
----------------------------------
train loss for this epoch:  0.212408
time for this epoch 62.1650071144104
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  129
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20966715
flow loss 0.05590648
occ loss 0.15330873
time for this batch 0.34296655654907227
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20963082
flow loss 0.055681325
occ loss 0.15350123
time for this batch 0.32584381103515625
----------------------------------
train loss for this epoch:  0.211052
time for this epoch 60.519129276275635
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  130
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20142719
flow loss 0.055472672
occ loss 0.14551473
time for this batch 0.36144518852233887
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22712024
flow loss 0.062241502
occ loss 0.16458537
time for this batch 0.4328653812408447
----------------------------------
train loss for this epoch:  0.211388
time for this epoch 62.387415409088135
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  131
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23723581
flow loss 0.06240287
occ loss 0.17456643
time for this batch 0.35629749298095703
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21932074
flow loss 0.057142112
occ loss 0.16140407
time for this batch 0.36684489250183105
----------------------------------
train loss for this epoch:  0.210213
time for this epoch 62.839929819107056
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  132
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19222717
flow loss 0.050523326
occ loss 0.1413409
time for this batch 0.3603208065032959
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21093296
flow loss 0.058264073
occ loss 0.15225379
time for this batch 0.4402194023132324
----------------------------------
train loss for this epoch:  0.211404
time for this epoch 63.106489181518555
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  133
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23303804
flow loss 0.0621917
occ loss 0.17029606
time for this batch 0.4035491943359375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18845928
flow loss 0.05603091
occ loss 0.13170275
time for this batch 0.374406099319458
----------------------------------
train loss for this epoch:  0.211763
time for this epoch 57.71715831756592
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  134
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25089344
flow loss 0.06378613
occ loss 0.18669736
time for this batch 0.34687089920043945
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20964447
flow loss 0.05659702
occ loss 0.15271002
time for this batch 0.40065455436706543
----------------------------------
train loss for this epoch:  0.211336
time for this epoch 57.46626615524292
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  135
# batch:  124
i_batch:  0.0
the loss for this batch:  0.12839548
flow loss 0.03917844
occ loss 0.08895057
time for this batch 0.3155517578125
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20438705
flow loss 0.053216655
occ loss 0.15080306
time for this batch 0.3652007579803467
----------------------------------
train loss for this epoch:  0.2109
time for this epoch 55.100343227386475
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  136
# batch:  124
i_batch:  0.0
the loss for this batch:  0.26169503
flow loss 0.07100421
occ loss 0.18998466
time for this batch 0.3195817470550537
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20967239
flow loss 0.05539957
occ loss 0.15391056
time for this batch 0.44963574409484863
----------------------------------
train loss for this epoch:  0.210661
time for this epoch 55.77916955947876
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  137
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24145761
flow loss 0.066245355
occ loss 0.17486273
time for this batch 0.3163633346557617
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23250966
flow loss 0.06107978
occ loss 0.17100084
time for this batch 0.4007406234741211
----------------------------------
train loss for this epoch:  0.210148
time for this epoch 57.8660614490509
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  138
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18655327
flow loss 0.051386498
occ loss 0.13448955
time for this batch 0.3242626190185547
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17868944
flow loss 0.049291525
occ loss 0.12894908
time for this batch 0.3900940418243408
----------------------------------
train loss for this epoch:  0.212791
time for this epoch 57.2260057926178
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  139
# batch:  124
i_batch:  0.0
the loss for this batch:  0.26305935
flow loss 0.065711804
occ loss 0.19675544
time for this batch 0.32539868354797363
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15924144
flow loss 0.047835477
occ loss 0.11067772
time for this batch 0.3852198123931885
----------------------------------
train loss for this epoch:  0.210379
time for this epoch 57.41444730758667
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  140
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20568128
flow loss 0.05638786
occ loss 0.14866269
time for this batch 0.33144092559814453
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2230156
flow loss 0.055464797
occ loss 0.16688587
time for this batch 0.3542160987854004
----------------------------------
train loss for this epoch:  0.208321
time for this epoch 54.61547374725342
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  141
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20049901
flow loss 0.054868493
occ loss 0.14501147
time for this batch 0.33043932914733887
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21153182
flow loss 0.055103827
occ loss 0.1559642
time for this batch 0.37822604179382324
----------------------------------
train loss for this epoch:  0.209597
time for this epoch 55.94676470756531
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  142
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23260796
flow loss 0.061640672
occ loss 0.17042412
time for this batch 0.38171958923339844
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24828191
flow loss 0.061711617
occ loss 0.18611053
time for this batch 0.39584827423095703
----------------------------------
train loss for this epoch:  0.208536
time for this epoch 60.954046964645386
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  143
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18802483
flow loss 0.054637015
occ loss 0.1331015
time for this batch 0.3029594421386719
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23589516
flow loss 0.062341034
occ loss 0.17311572
time for this batch 0.393918514251709
----------------------------------
train loss for this epoch:  0.209314
time for this epoch 59.03946304321289
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  144
# batch:  124
i_batch:  0.0
the loss for this batch:  0.118708044
flow loss 0.03909263
occ loss 0.07850284
time for this batch 0.4381732940673828
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18274121
flow loss 0.047567967
occ loss 0.13485233
time for this batch 0.4035453796386719
----------------------------------
train loss for this epoch:  0.208598
time for this epoch 57.75765681266785
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  145
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2507744
flow loss 0.06310544
occ loss 0.1872768
time for this batch 0.2305889129638672
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24802941
flow loss 0.06281773
occ loss 0.18450153
time for this batch 0.3968350887298584
----------------------------------
train loss for this epoch:  0.209867
time for this epoch 55.47022008895874
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  146
# batch:  124
i_batch:  0.0
the loss for this batch:  0.29954812
flow loss 0.06921609
occ loss 0.22972772
time for this batch 0.3246889114379883
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22500479
flow loss 0.0573374
occ loss 0.1668308
time for this batch 0.39711666107177734
----------------------------------
train loss for this epoch:  0.20911
time for this epoch 56.8296856880188
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  147
# batch:  124
i_batch:  0.0
the loss for this batch:  0.25088832
flow loss 0.062490687
occ loss 0.1879457
time for this batch 0.32751035690307617
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2351153
flow loss 0.065643124
occ loss 0.16864873
time for this batch 0.35750532150268555
----------------------------------
train loss for this epoch:  0.208525
time for this epoch 56.40230441093445
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  148
# batch:  124
i_batch:  0.0
the loss for this batch:  0.17811966
flow loss 0.04635173
occ loss 0.13132773
time for this batch 0.33146071434020996
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24723853
flow loss 0.06250084
occ loss 0.18421838
time for this batch 0.3922460079193115
----------------------------------
train loss for this epoch:  0.210477
time for this epoch 55.23791718482971
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  149
# batch:  124
i_batch:  0.0
the loss for this batch:  0.24258958
flow loss 0.06066562
occ loss 0.18086196
time for this batch 0.3640873432159424
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17229806
flow loss 0.048743334
occ loss 0.12327089
time for this batch 0.38468265533447266
----------------------------------
train loss for this epoch:  0.209135
time for this epoch 54.53561472892761
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  150
# batch:  124
i_batch:  0.0
the loss for this batch:  0.22544672
flow loss 0.05770968
occ loss 0.16728501
time for this batch 0.33144140243530273
----------------------------------
i_batch:  64.0
the loss for this batch:  0.18731081
flow loss 0.049349204
occ loss 0.13764594
time for this batch 0.377960205078125
----------------------------------
train loss for this epoch:  0.202435
time for this epoch 52.38698768615723
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  151
# batch:  124
i_batch:  0.0
the loss for this batch:  0.17508927
flow loss 0.047447246
occ loss 0.12732676
time for this batch 0.3133730888366699
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19363573
flow loss 0.05268562
occ loss 0.14049539
time for this batch 0.28293871879577637
----------------------------------
train loss for this epoch:  0.201451
time for this epoch 57.03261470794678
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  152
# batch:  124
i_batch:  0.0
the loss for this batch:  0.26563343
flow loss 0.06594435
occ loss 0.19922738
time for this batch 0.3286252021789551
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23037255
flow loss 0.0602396
occ loss 0.16946813
time for this batch 0.37340688705444336
----------------------------------
train loss for this epoch:  0.200915
time for this epoch 56.174049377441406
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  153
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20124084
flow loss 0.052033015
occ loss 0.14890341
time for this batch 0.3166790008544922
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2519251
flow loss 0.06598279
occ loss 0.18527418
time for this batch 0.3811650276184082
----------------------------------
train loss for this epoch:  0.200775
time for this epoch 58.469826221466064
No_decrease:  0
----------------an epoch starts-------------------
i_epoch:  154
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20889975
flow loss 0.053432237
occ loss 0.15502374
time for this batch 0.32251763343811035
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22290191
flow loss 0.060060672
occ loss 0.1625975
time for this batch 0.3997325897216797
----------------------------------
train loss for this epoch:  0.200893
time for this epoch 56.725106716156006
No_decrease:  1
----------------an epoch starts-------------------
i_epoch:  155
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19274151
flow loss 0.049330827
occ loss 0.14290263
time for this batch 0.3756372928619385
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2405879
flow loss 0.062027548
occ loss 0.17813131
time for this batch 0.373929500579834
----------------------------------
train loss for this epoch:  0.200564
time for this epoch 56.55095100402832
No_decrease:  2
----------------an epoch starts-------------------
i_epoch:  156
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20904471
flow loss 0.053231772
occ loss 0.15553063
time for this batch 0.31839919090270996
----------------------------------
i_batch:  64.0
the loss for this batch:  0.26152524
flow loss 0.062182486
occ loss 0.19885466
time for this batch 0.39647412300109863
----------------------------------
train loss for this epoch:  0.20033
time for this epoch 55.876933336257935
No_decrease:  3
----------------an epoch starts-------------------
i_epoch:  157
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2024745
flow loss 0.052926965
occ loss 0.1490985
time for this batch 0.3296177387237549
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19632286
flow loss 0.051903866
occ loss 0.14401366
time for this batch 0.37854623794555664
----------------------------------
train loss for this epoch:  0.200437
time for this epoch 55.80081081390381
No_decrease:  4
----------------an epoch starts-------------------
i_epoch:  158
# batch:  124
i_batch:  0.0
the loss for this batch:  0.17137629
flow loss 0.044725366
occ loss 0.12636457
time for this batch 0.3051869869232178
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22080636
flow loss 0.05838503
occ loss 0.16188744
time for this batch 0.3952903747558594
----------------------------------
train loss for this epoch:  0.200193
time for this epoch 58.54358196258545
No_decrease:  5
----------------an epoch starts-------------------
i_epoch:  159
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19230828
flow loss 0.05286432
occ loss 0.13920106
time for this batch 0.3206136226654053
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19902703
flow loss 0.051997975
occ loss 0.14648339
time for this batch 0.3959314823150635
----------------------------------
train loss for this epoch:  0.200717
time for this epoch 56.646005392074585
No_decrease:  6
----------------an epoch starts-------------------
i_epoch:  160
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2040258
flow loss 0.053250093
occ loss 0.15035458
time for this batch 0.3132593631744385
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17942348
flow loss 0.04833226
occ loss 0.1305807
time for this batch 0.3535318374633789
----------------------------------
train loss for this epoch:  0.200431
time for this epoch 54.9258553981781
No_decrease:  7
----------------an epoch starts-------------------
i_epoch:  161
# batch:  124
i_batch:  0.0
the loss for this batch:  0.20021416
flow loss 0.056202143
occ loss 0.14349008
time for this batch 0.3282589912414551
----------------------------------
i_batch:  64.0
the loss for this batch:  0.1399902
flow loss 0.037202265
occ loss 0.10241886
time for this batch 0.3782074451446533
----------------------------------
train loss for this epoch:  0.200429
time for this epoch 55.44337034225464
No_decrease:  8
----------------an epoch starts-------------------
i_epoch:  162
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19886035
flow loss 0.05563831
occ loss 0.14292222
time for this batch 0.34711527824401855
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17243548
flow loss 0.049807314
occ loss 0.12234077
time for this batch 0.3935377597808838
----------------------------------
train loss for this epoch:  0.200075
time for this epoch 61.11107039451599
No_decrease:  9
----------------an epoch starts-------------------
i_epoch:  163
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23114097
flow loss 0.061999347
occ loss 0.16866058
time for this batch 0.31491565704345703
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2809842
flow loss 0.069364846
occ loss 0.21109927
time for this batch 0.3996410369873047
----------------------------------
train loss for this epoch:  0.199811
time for this epoch 58.52831983566284
No_decrease:  10
----------------an epoch starts-------------------
i_epoch:  164
# batch:  124
i_batch:  0.0
the loss for this batch:  0.2834407
flow loss 0.06690343
occ loss 0.21584778
time for this batch 0.3237423896789551
----------------------------------
i_batch:  64.0
the loss for this batch:  0.24472028
flow loss 0.06297133
occ loss 0.18131305
time for this batch 0.3737924098968506
----------------------------------
train loss for this epoch:  0.199962
time for this epoch 56.210179805755615
No_decrease:  11
----------------an epoch starts-------------------
i_epoch:  165
# batch:  124
i_batch:  0.0
the loss for this batch:  0.17034414
flow loss 0.048383396
occ loss 0.12149709
time for this batch 0.33112406730651855
----------------------------------
i_batch:  64.0
the loss for this batch:  0.21689038
flow loss 0.05453071
occ loss 0.16199881
time for this batch 0.29999828338623047
----------------------------------
train loss for this epoch:  0.199825
time for this epoch 55.738481760025024
No_decrease:  12
----------------an epoch starts-------------------
i_epoch:  166
# batch:  124
i_batch:  0.0
the loss for this batch:  0.10772258
flow loss 0.03342322
occ loss 0.07410841
time for this batch 0.31736326217651367
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19441256
flow loss 0.05440215
occ loss 0.1395647
time for this batch 0.39225006103515625
----------------------------------
train loss for this epoch:  0.199518
time for this epoch 56.08292603492737
No_decrease:  13
----------------an epoch starts-------------------
i_epoch:  167
# batch:  124
i_batch:  0.0
the loss for this batch:  0.15875356
flow loss 0.045137875
occ loss 0.1133022
time for this batch 0.3229942321777344
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19465214
flow loss 0.052872855
occ loss 0.14145398
time for this batch 0.36229658126831055
----------------------------------
train loss for this epoch:  0.200107
time for this epoch 56.11011481285095
No_decrease:  14
----------------an epoch starts-------------------
i_epoch:  168
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21879724
flow loss 0.05786957
occ loss 0.16057618
time for this batch 0.31800079345703125
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2172382
flow loss 0.053359993
occ loss 0.16337526
time for this batch 0.3501579761505127
----------------------------------
train loss for this epoch:  0.200058
time for this epoch 55.60925602912903
No_decrease:  15
----------------an epoch starts-------------------
i_epoch:  169
# batch:  124
i_batch:  0.0
the loss for this batch:  0.1808788
flow loss 0.048703298
occ loss 0.13191599
time for this batch 0.3301239013671875
----------------------------------
i_batch:  64.0
the loss for this batch:  0.2199905
flow loss 0.058485154
occ loss 0.1611257
time for this batch 0.311176061630249
----------------------------------
train loss for this epoch:  0.199617
time for this epoch 54.43928098678589
No_decrease:  16
----------------an epoch starts-------------------
i_epoch:  170
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21177006
flow loss 0.054368496
occ loss 0.15688673
time for this batch 0.328113317489624
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14446528
flow loss 0.03993397
occ loss 0.10417684
time for this batch 0.40082573890686035
----------------------------------
train loss for this epoch:  0.199684
time for this epoch 58.476709604263306
No_decrease:  17
----------------an epoch starts-------------------
i_epoch:  171
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19083975
flow loss 0.055276893
occ loss 0.1352184
time for this batch 0.3612687587738037
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19561857
flow loss 0.054166567
occ loss 0.14100814
time for this batch 0.35644960403442383
----------------------------------
train loss for this epoch:  0.199521
time for this epoch 57.135366678237915
No_decrease:  18
----------------an epoch starts-------------------
i_epoch:  172
# batch:  124
i_batch:  0.0
the loss for this batch:  0.1928622
flow loss 0.051509194
occ loss 0.14107038
time for this batch 0.3701510429382324
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19597591
flow loss 0.046939213
occ loss 0.14872263
time for this batch 0.38524365425109863
----------------------------------
train loss for this epoch:  0.200151
time for this epoch 59.76276922225952
No_decrease:  19
----------------an epoch starts-------------------
i_epoch:  173
# batch:  124
i_batch:  0.0
the loss for this batch:  0.1845787
flow loss 0.04765564
occ loss 0.13638231
time for this batch 0.3559544086456299
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17295474
flow loss 0.047152232
occ loss 0.12546195
time for this batch 0.4039750099182129
----------------------------------
train loss for this epoch:  0.199408
time for this epoch 63.36862087249756
No_decrease:  20
----------------an epoch starts-------------------
i_epoch:  174
# batch:  124
i_batch:  0.0
the loss for this batch:  0.22580706
flow loss 0.05823025
occ loss 0.16712044
time for this batch 0.31988024711608887
----------------------------------
i_batch:  64.0
the loss for this batch:  0.17894198
flow loss 0.044487398
occ loss 0.13414462
time for this batch 0.3731677532196045
----------------------------------
train loss for this epoch:  0.199229
time for this epoch 55.58278465270996
No_decrease:  21
----------------an epoch starts-------------------
i_epoch:  175
# batch:  124
i_batch:  0.0
the loss for this batch:  0.17280304
flow loss 0.046749987
occ loss 0.12570395
time for this batch 0.3219294548034668
----------------------------------
i_batch:  64.0
the loss for this batch:  0.22283792
flow loss 0.060657848
occ loss 0.16156921
time for this batch 0.3425319194793701
----------------------------------
train loss for this epoch:  0.199232
time for this epoch 57.008519887924194
No_decrease:  22
----------------an epoch starts-------------------
i_epoch:  176
# batch:  124
i_batch:  0.0
the loss for this batch:  0.18974896
flow loss 0.05080864
occ loss 0.13866563
time for this batch 0.2302708625793457
----------------------------------
i_batch:  64.0
the loss for this batch:  0.14368291
flow loss 0.039795715
occ loss 0.10368629
time for this batch 0.3527047634124756
----------------------------------
train loss for this epoch:  0.199424
time for this epoch 54.17574906349182
No_decrease:  23
----------------an epoch starts-------------------
i_epoch:  177
# batch:  124
i_batch:  0.0
the loss for this batch:  0.1768179
flow loss 0.049606007
occ loss 0.12684333
time for this batch 0.31728219985961914
----------------------------------
i_batch:  64.0
the loss for this batch:  0.27652803
flow loss 0.06752767
occ loss 0.20860232
time for this batch 0.3870093822479248
----------------------------------
train loss for this epoch:  0.200167
time for this epoch 56.26906442642212
No_decrease:  24
----------------an epoch starts-------------------
i_epoch:  178
# batch:  124
i_batch:  0.0
the loss for this batch:  0.14798017
flow loss 0.040723022
occ loss 0.106731944
time for this batch 0.36989784240722656
----------------------------------
i_batch:  64.0
the loss for this batch:  0.25925997
flow loss 0.06367692
occ loss 0.19512664
time for this batch 0.40022897720336914
----------------------------------
train loss for this epoch:  0.199341
time for this epoch 58.61595916748047
No_decrease:  25
----------------an epoch starts-------------------
i_epoch:  179
# batch:  124
i_batch:  0.0
the loss for this batch:  0.21903257
flow loss 0.058848016
occ loss 0.15949906
time for this batch 0.3302037715911865
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20683789
flow loss 0.05416989
occ loss 0.1523794
time for this batch 0.37154459953308105
----------------------------------
train loss for this epoch:  0.199141
time for this epoch 55.49847769737244
No_decrease:  26
----------------an epoch starts-------------------
i_epoch:  180
# batch:  124
i_batch:  0.0
the loss for this batch:  0.19200328
flow loss 0.05027941
occ loss 0.14131056
time for this batch 0.307861328125
----------------------------------
i_batch:  64.0
the loss for this batch:  0.20549263
flow loss 0.05606951
occ loss 0.14910114
time for this batch 0.3742406368255615
----------------------------------
train loss for this epoch:  0.199216
time for this epoch 55.968082666397095
No_decrease:  27
----------------an epoch starts-------------------
i_epoch:  181
# batch:  124
i_batch:  0.0
the loss for this batch:  0.16034037
flow loss 0.043130234
occ loss 0.11686816
time for this batch 0.3193199634552002
----------------------------------
i_batch:  64.0
the loss for this batch:  0.23929529
flow loss 0.06061436
occ loss 0.17818958
time for this batch 0.39344072341918945
----------------------------------
train loss for this epoch:  0.199083
time for this epoch 55.5471670627594
No_decrease:  28
----------------an epoch starts-------------------
i_epoch:  182
# batch:  124
i_batch:  0.0
the loss for this batch:  0.15563929
flow loss 0.041567057
occ loss 0.11375057
time for this batch 0.3339273929595947
----------------------------------
i_batch:  64.0
the loss for this batch:  0.15054493
flow loss 0.04300888
occ loss 0.10729003
time for this batch 0.3837265968322754
----------------------------------
train loss for this epoch:  0.199246
time for this epoch 56.56777548789978
No_decrease:  29
----------------an epoch starts-------------------
i_epoch:  183
# batch:  124
i_batch:  0.0
the loss for this batch:  0.23317528
flow loss 0.055783752
occ loss 0.17700909
time for this batch 0.33321380615234375
----------------------------------
i_batch:  64.0
the loss for this batch:  0.19953547
flow loss 0.052990597
occ loss 0.1462005
time for this batch 0.3809478282928467
----------------------------------
train loss for this epoch:  0.198932
time for this epoch 57.19133543968201
Early stop at the 184-th epoch

7: apply the trained model to the validation and test sets¶

In [13]:
def apply_to_vali_test(model, vt, f_o_mean_std):
    """Evaluate a trained model on one held-out split (validation or test).

    Parameters
    ----------
    model : trained model passed straight through to ``vali_test``.
    vt : dict with keys "flow", "flow_mask", "occupancy", "occupancy_mask".
         The two masks are moved to the GPU here; the flow/occupancy
         tensors are forwarded as-is (presumably ``vali_test`` handles
         their device placement — confirm against its definition).
    f_o_mean_std : normalization statistics used by ``vali_test`` to
         report errors in original units.

    Returns
    -------
    (flow_mae, flow_rmse, occ_mae, occ_rmse) as produced by ``vali_test``,
    after printing each metric.
    """
    flow = vt["flow"]
    flow_mask = vt["flow_mask"].to(device)
    occ = vt["occupancy"]
    occ_mask = vt["occupancy_mask"].to(device)
    # Batch size for evaluation comes from the shared hyperparameter dict.
    metrics = vali_test(model, flow, flow_mask, occ, occ_mask,
                        f_o_mean_std, hyper["b_s_vt"])
    f_mae, f_rmse, o_mae, o_rmse = metrics
    # Print with the exact labels used elsewhere in the notebook.
    for label, value in zip(("flow_mae", "flow_rmse", "occ_mae", "occ_rmse"),
                            metrics):
        print (label, value)
    return f_mae, f_rmse, o_mae, o_rmse

Validate¶

In [14]:
# Evaluate the trained model on the validation split and keep the metrics.
vali_f_mae, vali_f_rmse, vali_o_mae, vali_o_rmse = apply_to_vali_test(
    trained_model, vali, f_o_mean_std)
flow_mae 34.83063394313236
flow_rmse 53.87441952162064
occ_mae 0.03831921818638007
occ_rmse 0.07841136881949592

Test¶

In [15]:
# Evaluate the trained model on the test split and keep the metrics.
test_f_mae, test_f_rmse, test_o_mae, test_o_rmse = apply_to_vali_test(
    trained_model, test, f_o_mean_std)
flow_mae 33.03770116332661
flow_rmse 51.03821776888166
occ_mae 0.0314599494901205
occ_rmse 0.06736482214882997
In [ ]:
 
In [ ]:
 
In [ ]: